Model Specification or Programmatic Generation
In [1]:
import numpy as np
In [2]:
a = np.zeros((2,2))
In [3]:
a
Out[3]:
In [4]:
a.shape
Out[4]:
In [5]:
np.reshape(a, (1,4))
Out[5]:
In [6]:
b = np.ones((2,2))
In [7]:
b
Out[7]:
In [8]:
np.sum(b, axis=1)
Out[8]:
In [9]:
import tensorflow as tf
In [10]:
tf.InteractiveSession()
Out[10]:
In [11]:
a = tf.zeros((2,2))
In [12]:
a
Out[12]:
In [13]:
b = tf.ones((2,2))
In [14]:
b
Out[14]:
In [15]:
tf.reduce_sum(b, axis=1).eval()  # reduction_indices is the older, deprecated name for axis
Out[15]:
In [16]:
a.get_shape()
Out[16]:
In [17]:
tf.reshape(a, (1,4)).eval()
Out[17]:
In [18]:
# TensorFlow computations define a computation graph
# This means no numerical value until evaluated explicitly
In [19]:
a = np.zeros((2,2))
In [20]:
a
Out[20]:
In [21]:
ta = tf.zeros((2,2))
In [22]:
print(a)
In [23]:
print(ta)
In [24]:
print(ta.eval())
In [25]:
# A session object encapsulates the environment in which
# Tensor objects are evaluated
In [26]:
a = tf.constant(5.0)
In [27]:
a
Out[27]:
In [28]:
b = tf.constant(6.0)
In [29]:
b
Out[29]:
In [30]:
c = a * b
In [31]:
with tf.Session() as session:
    print(session.run(c))
    print(c.eval())
NOTE-1: tf.InteractiveSession() is syntactic sugar for keeping a default session open in ipython, so tensors can be evaluated with .eval() without passing a session explicitly.
NOTE-2: session.run(c) fetches the value of c from the graph; a and b here are constant tensors.
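As a small hedged aside (not from the original notebook), the sketch below contrasts the two styles, assuming TensorFlow 1.x:

# minimal sketch, assuming TensorFlow 1.x
import tensorflow as tf

c = tf.constant(5.0) * tf.constant(6.0)

# without an InteractiveSession, eval() needs to know which session to use
with tf.Session() as sess:
    print(c.eval(session=sess))   # 30.0

# with an InteractiveSession installed as the default session
sess = tf.InteractiveSession()
print(c.eval())                   # 30.0, no session argument needed
sess.close()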
In [32]:
w1 = tf.ones((2,2))
w1
Out[32]:
In [33]:
w2 = tf.Variable(tf.zeros((2,2)), name='weights')
w2
Out[33]:
In [34]:
with tf.Session() as sess:
    # w1 is a TensorFlow constant
    print(sess.run(w1))
    # w2 is a TensorFlow variable;
    # note the initializer call: variables must be initialized before use
    sess.run(tf.global_variables_initializer())
    print(sess.run(w2))
In [35]:
W = tf.Variable(tf.zeros((2,2)), name="weights")
# variable objects can be initialized from constants
In [36]:
R = tf.Variable(tf.random_normal((2,2)), name="random_weights")
# variable objects can be initialized from random values
In [37]:
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(W))
    print(sess.run(R))
In [38]:
state = tf.Variable(0, name="counter")
In [39]:
new_value = tf.add(state, tf.constant(1))
# think of this as doing
# new_value = state + 1
In [40]:
update = tf.assign(state, new_value)
# think of this as state = new_value
In [41]:
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(state))
    for _ in range(3):
        sess.run(update)
        print(sess.run(state))
#
# Equivalent plain-Python version:
#
# state = 0
# print(state)
# for _ in range(3):
#     state = state + 1
#     print(state)
#
In [42]:
input1 = tf.constant(3.0)
In [43]:
input2 = tf.constant(2.0)
In [44]:
input3 = tf.constant(5.0)
In [45]:
intermed = tf.add(input2, input3)
In [46]:
mul = tf.multiply(input1, intermed)
In [47]:
with tf.Session() as sess:
    result = sess.run([mul, intermed])
    print(result)
    # calling sess.run(var) on a tf.Session() object
    # retrieves its value.
    # to retrieve multiple values simultaneously,
    # pass a list: sess.run([var1, var2])
In [48]:
a = np.zeros((3,3))
In [49]:
ta = tf.convert_to_tensor(a)
In [50]:
with tf.Session() as sess:
    print(sess.run(ta))
tf.placeholder variables are dummy nodes that provide entry points for external data into the computation graph.
A feed_dict is a Python dictionary mapping tf.placeholder variables (or their names)
to data such as numpy arrays, lists, etc.
In [51]:
# define placeholder objects for data entry
input1 = tf.placeholder(tf.float32)
input2 = tf.placeholder(tf.float32)
output = tf.multiply(input1, input2)
In [52]:
with tf.Session() as sess:
    print(sess.run([output], feed_dict={input1: [7.], input2: [2.]}))
    # fetch the value of output from the computation graph
    # feed data into the computation graph via feed_dict
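A feed_dict can also be keyed by a tensor's name string instead of the Python variable; a minimal hedged sketch reusing the placeholders defined above:

with tf.Session() as sess:
    # input1.name / input2.name are autogenerated strings such as 'Placeholder:0'
    print(sess.run([output], feed_dict={input1.name: [7.], input2.name: [2.]}))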
tf.variable_scope() provides simple name-spacing to avoid name clashes.
tf.get_variable() creates/accesses variables from within a variable scope.
In [53]:
# with tf.variable_scope("foo"):
# with tf.variable_scope("bar"):
# v = tf.get_variable("v", [1])
# assert v.name == "foo/bar/v:0"
In [54]:
# with tf.variable_scope("foo"):
# v = tf.get_variable("v", [1])
# tf.get_variable_scope().reuse_variables()
# v1 = tf.get_variable("v", [1])
# assert v1 == v
In [55]:
#
# with tf.variable_scope("foo"):
#     v = tf.get_variable("v", [1])
# assert v.name == "foo/v:0"
#
#
# with tf.variable_scope("foo"):
#     v = tf.get_variable("v", [1])
# with tf.variable_scope("foo", reuse=True):
#     v1 = tf.get_variable("v", [1])
# assert v1 == v
#
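Since the scope examples above are commented out, here is a small runnable sketch of the same reuse pattern; the scope name "demo" is hypothetical and TensorFlow 1.x is assumed:

with tf.variable_scope("demo"):                 # hypothetical scope name
    v = tf.get_variable("v", [1])               # creates demo/v:0
with tf.variable_scope("demo", reuse=True):
    v1 = tf.get_variable("v", [1])              # fetches the existing demo/v:0

assert v.name == "demo/v:0"
assert v1 is v                                  # same underlying Variable object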
In [56]:
# this is our first tensorflow line of code
# 1. its a tensorflow constant !
# welcome to tensorflow land
x = tf.constant(35, name='x')
In [57]:
y = x + 5
In [58]:
print(y)
In [59]:
with tf.Session() as session:
    print(session.run(y))
In [60]:
%matplotlib inline
In [61]:
import matplotlib.image as mpimg
In [62]:
import matplotlib.pyplot as plt
In [63]:
# which image
filename = "ganesha.jpg"
In [64]:
# load image
raw_image_data = mpimg.imread(filename)
In [65]:
# Lord Ganesha was the scribe for Mr. Veda Vyasa,
# who was narrating the Mahabharata.
#
# Later today we want to see if GANs can learn
# the joint distribution over b/w images of Ganesha.
#
# For now, here is what our god looks like ...
#
# Notice that there are 13 discrete features.
plt.imshow(raw_image_data)
Out[65]:
In [66]:
# create a
# 1. tensorflow constant (last time)
# 2. tensorflow variable (now)
x = tf.Variable(raw_image_data, name='x')
In [67]:
# tf.initialize_all_variables() was deprecated recently
model = tf.global_variables_initializer()
In [68]:
with tf.Session() as session:
    # perform a basic operation: swap the height and width axes,
    # keeping the colour channels in place
    transpose_op = tf.transpose(x, perm=[1, 0, 2])
    session.run(model)
    result = session.run(transpose_op)
In [69]:
# he may not like it, but here is the transpose
plt.imshow(result)
Out[69]:
In [70]:
x = tf.placeholder("float", 3)
# the shape argument is optional, but specifying it helps catch mismatches
In [71]:
y = x * 2
In [72]:
with tf.Session() as session:
    result = session.run(y, feed_dict={x: [1, 2, 3]})
    print(result)
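Because x was declared with shape 3, feeding a list of a different length is rejected at run time; a hedged sketch (the exact error message varies by TF version):

with tf.Session() as session:
    try:
        # feeding 4 values into a placeholder declared with shape 3
        session.run(y, feed_dict={x: [1, 2, 3, 4]})
    except ValueError as e:
        print("shape mismatch rejected:", e)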
In [73]:
x = tf.placeholder("float", [None, 3])
# size can be multidimensional
# None means , you dont know the size now
# Like Data sets used in ML
# You dont want to hardcode the number of samples
In [74]:
y = x * 2
In [75]:
x_data = [[1, 2, 3],
          [4, 5, 6]]
#
# this is 2 by 3;
# it could be 3 by 3,
# or 4 by 3, ...
In [76]:
with tf.Session() as session:
    result = session.run(y, feed_dict={x: x_data})
    print(result)
In [77]:
image = tf.placeholder("uint8", [None, None, 3])
In [78]:
reverse = tf.reverse(image, axis=[1])
# flip left-right (mirror along the width axis);
# the older boolean-mask form, e.g. [False, True, False], is deprecated
In [79]:
with tf.Session() as session:
    result = session.run(reverse, feed_dict={image: raw_image_data})
    print(result.shape)
In [80]:
plt.imshow(result)
plt.show()